[[package]]
name = "curl"
version = "0.1.0"
-source = "git+https://github.com/carllerche/curl-rust#dbd06414ba879572faae75bfd0320a38fbbc3518"
+source = "git+https://github.com/carllerche/curl-rust#e6e31d8d601bffdf3c9b31993e6ffe3bc8a99618"
dependencies = [
"curl-sys 0.1.0 (git+https://github.com/carllerche/curl-rust)",
"url 0.1.0 (git+https://github.com/servo/rust-url)",
[[package]]
name = "curl-sys"
version = "0.1.0"
-source = "git+https://github.com/carllerche/curl-rust#dbd06414ba879572faae75bfd0320a38fbbc3518"
+source = "git+https://github.com/carllerche/curl-rust#e6e31d8d601bffdf3c9b31993e6ffe3bc8a99618"
dependencies = [
"libz-sys 0.0.1 (git+https://github.com/alexcrichton/libz-sys)",
- "openssl-sys 0.0.1 (git+https://github.com/alexcrichton/openssl-sys)",
+ "openssl-sys 0.0.1 (git+https://github.com/sfackler/rust-openssl)",
"pkg-config 0.0.1 (git+https://github.com/alexcrichton/pkg-config-rs)",
]
[[package]]
name = "docopt"
version = "0.6.8"
-source = "git+https://github.com/docopt/docopt.rs#0b184b15ed5ea7e3c180b088e8f99baa4427f989"
+source = "git+https://github.com/docopt/docopt.rs#911d25c7328c0fdc6499757b9dcb831472e3cc13"
[[package]]
name = "encoding"
-version = "0.2.0"
-source = "git+https://github.com/lifthrasiir/rust-encoding#a06637cc6d0da37c12c68661e2ee9ca1999764a4"
+version = "0.2.1"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
dependencies = [
"encoding-index-japanese 1.0.20140915 (git+https://github.com/lifthrasiir/rust-encoding)",
"encoding-index-korean 1.0.20140915 (git+https://github.com/lifthrasiir/rust-encoding)",
[[package]]
name = "encoding-index-japanese"
version = "1.0.20140915"
-source = "git+https://github.com/lifthrasiir/rust-encoding#a06637cc6d0da37c12c68661e2ee9ca1999764a4"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
+dependencies = [
+ "encoding_index_tests 0.1.0 (git+https://github.com/lifthrasiir/rust-encoding)",
+]
[[package]]
name = "encoding-index-korean"
version = "1.0.20140915"
-source = "git+https://github.com/lifthrasiir/rust-encoding#a06637cc6d0da37c12c68661e2ee9ca1999764a4"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
+dependencies = [
+ "encoding_index_tests 0.1.0 (git+https://github.com/lifthrasiir/rust-encoding)",
+]
[[package]]
name = "encoding-index-simpchinese"
version = "1.0.20140915"
-source = "git+https://github.com/lifthrasiir/rust-encoding#a06637cc6d0da37c12c68661e2ee9ca1999764a4"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
+dependencies = [
+ "encoding_index_tests 0.1.0 (git+https://github.com/lifthrasiir/rust-encoding)",
+]
[[package]]
name = "encoding-index-singlebyte"
version = "1.0.20140915"
-source = "git+https://github.com/lifthrasiir/rust-encoding#a06637cc6d0da37c12c68661e2ee9ca1999764a4"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
+dependencies = [
+ "encoding_index_tests 0.1.0 (git+https://github.com/lifthrasiir/rust-encoding)",
+]
[[package]]
name = "encoding-index-tradchinese"
version = "1.0.20140915"
-source = "git+https://github.com/lifthrasiir/rust-encoding#a06637cc6d0da37c12c68661e2ee9ca1999764a4"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
+dependencies = [
+ "encoding_index_tests 0.1.0 (git+https://github.com/lifthrasiir/rust-encoding)",
+]
+
+[[package]]
+name = "encoding_index_tests"
+version = "0.1.0"
+source = "git+https://github.com/lifthrasiir/rust-encoding#6a5a95950572590485cbbf64509036b252339205"
[[package]]
name = "flate2"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/flate2-rs#fc04214981c39633eb3859bd28389fc448d0e9fc"
+source = "git+https://github.com/alexcrichton/flate2-rs#f81e6dc401b2bf32c8274fa1b8327699fb9345c0"
dependencies = [
"miniz-sys 0.0.1 (git+https://github.com/alexcrichton/flate2-rs)",
]
[[package]]
name = "gcc"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/gcc-rs#f23b4ba15348508b9ac557fc7eed2733f65bc1ee"
+source = "git+https://github.com/alexcrichton/gcc-rs#f25b3ba9c40303781189cc137fb98fffe5b56de7"
[[package]]
name = "git2"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/git2-rs#afec995029fae2d1eedf4d635b2131080b5b196d"
+source = "git+https://github.com/alexcrichton/git2-rs#f188fc297b00261372bd3b4f7855cd8082e78cfc"
dependencies = [
"libgit2-sys 0.0.1 (git+https://github.com/alexcrichton/git2-rs)",
"time 0.0.1 (git+https://github.com/rust-lang/time)",
[[package]]
name = "glob"
version = "0.0.1"
-source = "git+https://github.com/rust-lang/glob#a469534627dec112d371f5acb95a561ea8940e69"
+source = "git+https://github.com/rust-lang/glob#5d7ad1dc2f10f8e23b93c025cf5b984d79cf7169"
[[package]]
name = "hamcrest"
version = "0.1.0"
-source = "git+https://github.com/carllerche/hamcrest-rust.git#998036d000095f72c8bc86f2c412f6432f7ccc3b"
+source = "git+https://github.com/carllerche/hamcrest-rust.git#2b9bd6cdae5dcf08acac84371fe889dc8eb5c528"
[[package]]
name = "libgit2-sys"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/git2-rs#afec995029fae2d1eedf4d635b2131080b5b196d"
+source = "git+https://github.com/alexcrichton/git2-rs#f188fc297b00261372bd3b4f7855cd8082e78cfc"
dependencies = [
"libssh2-sys 0.0.1 (git+https://github.com/alexcrichton/ssh2-rs)",
- "openssl-sys 0.0.1 (git+https://github.com/alexcrichton/openssl-sys)",
+ "openssl-sys 0.0.1 (git+https://github.com/sfackler/rust-openssl)",
"pkg-config 0.0.1 (git+https://github.com/alexcrichton/pkg-config-rs)",
]
[[package]]
name = "libssh2-sys"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/ssh2-rs#27d760680cf7b756298c8cb61fabe3aaefe0e84a"
+source = "git+https://github.com/alexcrichton/ssh2-rs#6118d76bb383420be32160af866cbe5fc7141f42"
dependencies = [
"libz-sys 0.0.1 (git+https://github.com/alexcrichton/libz-sys)",
- "openssl-sys 0.0.1 (git+https://github.com/alexcrichton/openssl-sys)",
+ "openssl-sys 0.0.1 (git+https://github.com/sfackler/rust-openssl)",
"pkg-config 0.0.1 (git+https://github.com/alexcrichton/pkg-config-rs)",
]
[[package]]
name = "miniz-sys"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/flate2-rs#fc04214981c39633eb3859bd28389fc448d0e9fc"
+source = "git+https://github.com/alexcrichton/flate2-rs#f81e6dc401b2bf32c8274fa1b8327699fb9345c0"
dependencies = [
"gcc 0.0.1 (git+https://github.com/alexcrichton/gcc-rs)",
]
[[package]]
name = "openssl-sys"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/openssl-sys#d66fe8400ab3cbb79ed405e26ae19c0d0cc20931"
+source = "git+https://github.com/sfackler/rust-openssl#af5533d936558a77c2de8c5d2d572bb1f89dfada"
dependencies = [
"pkg-config 0.0.1 (git+https://github.com/alexcrichton/pkg-config-rs)",
]
[[package]]
name = "semver"
version = "0.1.0"
-source = "git+https://github.com/rust-lang/semver#7dca047a9cd40e929a4545b37a1917daff82f156"
+source = "git+https://github.com/rust-lang/semver#1672853e20ef4bde672b08d55de40941d907649a"
[[package]]
name = "tar"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/tar-rs#47d2cc4b09e373a4cc7bee7c71ebf96b42ea620d"
+source = "git+https://github.com/alexcrichton/tar-rs#868632ea0dd36200377af2c445d71d387cba1702"
[[package]]
name = "time"
version = "0.0.1"
-source = "git+https://github.com/rust-lang/time#76698f52381a78cf654dbedfefd04c28a9806788"
+source = "git+https://github.com/rust-lang/time#3194d911a136a7bfa3218cdd5bd3379537de4133"
dependencies = [
"gcc 0.0.1 (git+https://github.com/alexcrichton/gcc-rs)",
]
[[package]]
name = "toml"
version = "0.1.0"
-source = "git+https://github.com/alexcrichton/toml-rs#ac6f3b27e03e7a5938fd4cae5fc3fecc5ab1a85f"
+source = "git+https://github.com/alexcrichton/toml-rs#4bb575fdaf5fd88580abbac6fdb057ef76d32251"
[[package]]
name = "url"
version = "0.1.0"
-source = "git+https://github.com/servo/rust-url#8a61b7654ab5378b488225a1d8a9cbbbcbd38894"
+source = "git+https://github.com/servo/rust-url#c3d32dae4c5641d034a848d42f88db48dd9feb96"
dependencies = [
- "encoding 0.2.0 (git+https://github.com/lifthrasiir/rust-encoding)",
+ "encoding 0.2.1 (git+https://github.com/lifthrasiir/rust-encoding)",
]
use cargo::ops;
use cargo::core::{MultiShell};
-use cargo::core::manifest::{BinTarget, ExampleTarget};
+use cargo::core::manifest::TargetKind;
use cargo::util::{CliResult, CliError, human};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
};
let (target_kind, name) = match (options.flag_bin, options.flag_example) {
- (Some(bin), None) => (BinTarget, Some(bin)),
- (None, Some(example)) => (ExampleTarget, Some(example)),
- (None, None) => (BinTarget, None),
+ (Some(bin), None) => (TargetKind::Bin, Some(bin)),
+ (None, Some(example)) => (TargetKind::Example, Some(example)),
+ (None, None) => (TargetKind::Bin, None),
(Some(_), Some(_)) => return Err(CliError::from_boxed(
human("specify either `--bin` or `--example`, not both"), 1)),
};
name: name.to_string(),
source_id: source_id.clone(),
req: VersionReq::any(),
- kind: Normal,
+ kind: Kind::Normal,
only_match_name: true,
optional: false,
features: Vec::new(),
/// Returns false if the dependency is only used to build the local package.
pub fn is_transitive(&self) -> bool {
match self.kind {
- Normal | Build => true,
- Development => false,
+ Kind::Normal | Kind::Build => true,
+ Kind::Development => false,
}
}
pub fn is_build(&self) -> bool {
- match self.kind { Build => true, _ => false }
+ match self.kind { Kind::Build => true, _ => false }
}
pub fn is_optional(&self) -> bool { self.optional }
/// Returns true if the default features of the dependency are requested.
impl LibKind {
pub fn from_str(string: &str) -> CargoResult<LibKind> {
match string {
- "lib" => Ok(Lib),
- "rlib" => Ok(Rlib),
- "dylib" => Ok(Dylib),
- "staticlib" => Ok(StaticLib),
+ "lib" => Ok(LibKind::Lib),
+ "rlib" => Ok(LibKind::Rlib),
+ "dylib" => Ok(LibKind::Dylib),
+ "staticlib" => Ok(LibKind::StaticLib),
_ => Err(human(format!("{} was not one of lib|rlib|dylib|staticlib",
string)))
}
/// Returns the argument suitable for `--crate-type` to pass to rustc.
pub fn crate_type(&self) -> &'static str {
match *self {
- Lib => "lib",
- Rlib => "rlib",
- Dylib => "dylib",
- StaticLib => "staticlib"
+ LibKind::Lib => "lib",
+ LibKind::Rlib => "rlib",
+ LibKind::Dylib => "dylib",
+ LibKind::StaticLib => "staticlib"
}
}
}
#[deriving(Show, Clone, Hash, PartialEq, Encodable)]
pub enum TargetKind {
- LibTarget(Vec<LibKind>),
- BinTarget,
- ExampleTarget,
+ Lib(Vec<LibKind>),
+ Bin,
+ Example,
}
#[deriving(Encodable, Decodable, Clone, PartialEq, Show)]
impl<E, S: Encoder<E>> Encodable<S, E> for Target {
fn encode(&self, s: &mut S) -> Result<(), E> {
let kind = match self.kind {
- LibTarget(ref kinds) => kinds.iter().map(|k| k.crate_type()).collect(),
- BinTarget => vec!("bin"),
- ExampleTarget => vec!["example"],
+ TargetKind::Lib(ref kinds) => kinds.iter().map(|k| k.crate_type()).collect(),
+ TargetKind::Bin => vec!("bin"),
+ TargetKind::Example => vec!["example"],
};
SerializedTarget {
src_path: &Path, profile: &Profile,
metadata: Metadata) -> Target {
Target {
- kind: LibTarget(crate_targets),
+ kind: TargetKind::Lib(crate_targets),
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
pub fn bin_target(name: &str, src_path: &Path, profile: &Profile,
metadata: Option<Metadata>) -> Target {
Target {
- kind: BinTarget,
+ kind: TargetKind::Bin,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
pub fn custom_build_target(name: &str, src_path: &Path, profile: &Profile,
metadata: Option<Metadata>) -> Target {
Target {
- kind: BinTarget,
+ kind: TargetKind::Bin,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
pub fn example_target(name: &str, src_path: &Path, profile: &Profile) -> Target {
Target {
- kind: ExampleTarget,
+ kind: TargetKind::Example,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
pub fn test_target(name: &str, src_path: &Path,
profile: &Profile, metadata: Metadata) -> Target {
Target {
- kind: BinTarget,
+ kind: TargetKind::Bin,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
pub fn bench_target(name: &str, src_path: &Path,
profile: &Profile, metadata: Metadata) -> Target {
Target {
- kind: BinTarget,
+ kind: TargetKind::Bin,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
pub fn is_lib(&self) -> bool {
match self.kind {
- LibTarget(_) => true,
+ TargetKind::Lib(_) => true,
_ => false
}
}
pub fn is_dylib(&self) -> bool {
match self.kind {
- LibTarget(ref kinds) => kinds.iter().any(|&k| k == Dylib),
+ TargetKind::Lib(ref kinds) => kinds.iter().any(|&k| k == LibKind::Dylib),
_ => false
}
}
pub fn is_rlib(&self) -> bool {
match self.kind {
- LibTarget(ref kinds) =>
- kinds.iter().any(|&k| k == Rlib || k == Lib),
+ TargetKind::Lib(ref kinds) =>
+ kinds.iter().any(|&k| k == LibKind::Rlib || k == LibKind::Lib),
_ => false
}
}
pub fn is_staticlib(&self) -> bool {
match self.kind {
- LibTarget(ref kinds) => kinds.iter().any(|&k| k == StaticLib),
+ TargetKind::Lib(ref kinds) => kinds.iter().any(|&k| k == LibKind::StaticLib),
_ => false
}
}
/// Returns true for binary, bench, and tests.
pub fn is_bin(&self) -> bool {
match self.kind {
- BinTarget => true,
+ TargetKind::Bin => true,
_ => false
}
}
/// Returns true for examples
pub fn is_example(&self) -> bool {
match self.kind {
- ExampleTarget => true,
+ TargetKind::Example => true,
_ => false
}
}
/// Returns the arguments suitable for `--crate-type` to pass to rustc.
pub fn rustc_crate_types(&self) -> Vec<&'static str> {
match self.kind {
- LibTarget(ref kinds) => {
+ TargetKind::Lib(ref kinds) => {
kinds.iter().map(|kind| kind.crate_type()).collect()
},
- ExampleTarget |
- BinTarget => vec!("bin"),
+ TargetKind::Example |
+ TargetKind::Bin => vec!("bin"),
}
}
}
impl CargoError for PackageIdError {
fn description(&self) -> String {
match *self {
- InvalidVersion(ref v) => format!("invalid version: {}", *v),
- InvalidNamespace(ref ns) => format!("invalid namespace: {}", *ns),
+ PackageIdError::InvalidVersion(ref v) => {
+ format!("invalid version: {}", *v)
+ }
+ PackageIdError::InvalidNamespace(ref ns) => {
+ format!("invalid namespace: {}", *ns)
+ }
}
}
fn is_human(&self) -> bool { true }
impl PackageId {
pub fn new<T: ToSemver>(name: &str, version: T,
sid: &SourceId) -> CargoResult<PackageId> {
- let v = try!(version.to_semver().map_err(InvalidVersion));
+ let v = try!(version.to_semver().map_err(PackageIdError::InvalidVersion));
Ok(PackageId {
inner: Arc::new(PackageIdInner {
name: name.to_string(),
// We've previously loaded this source, and we've already locked it,
// so we're not allowed to change it even if `namespace` has a
// slightly different precise version listed.
- Some(&(_, Locked)) => return Ok(()),
+ Some(&(_, Kind::Locked)) => return Ok(()),
// If the previous source was not a precise source, then we can be
// sure that it's already been updated if we've already loaded it.
None => {}
}
- try!(self.load(namespace, Normal));
+ try!(self.load(namespace, Kind::Normal));
Ok(())
}
pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
for id in ids.iter() {
- try!(self.load(id, Locked));
+ try!(self.load(id, Kind::Locked));
}
Ok(())
}
pub fn add_overrides(&mut self, ids: Vec<SourceId>) -> CargoResult<()> {
for id in ids.iter() {
- try!(self.load(id, Override));
+ try!(self.load(id, Kind::Override));
}
Ok(())
}
try!(source.update());
drop(p);
- if kind == Override {
+ if kind == Kind::Override {
self.overrides.push(source_id.clone());
}
metadata: Option<Metadata>,
}
-pub enum ResolveMethod<'a> {
- ResolveEverything,
- ResolveRequired(/* dev_deps = */ bool,
- /* features = */ &'a [String],
- /* uses_default_features = */ bool,
- /* target_platform = */ Option<&'a str>),
+pub enum Method<'a> {
+ Everything,
+ Required(/* dev_deps = */ bool,
+ /* features = */ &'a [String],
+ /* uses_default_features = */ bool,
+ /* target_platform = */ Option<&'a str>),
}
impl Resolve {
}
/// Builds the list of all packages required to build the first argument.
-pub fn resolve<R: Registry>(summary: &Summary, method: ResolveMethod,
+pub fn resolve<R: Registry>(summary: &Summary, method: Method,
registry: &mut R) -> CargoResult<Resolve> {
log!(5, "resolve; summary={}", summary);
fn activate<R: Registry>(mut cx: Context,
registry: &mut R,
parent: &Summary,
- method: ResolveMethod)
+ method: Method)
-> CargoResult<CargoResult<Context>> {
// Extracting the platform request.
let platform = match method {
- ResolveRequired(_, _, _, platform) => platform,
- ResolveEverything => None,
+ Method::Required(_, _, _, platform) => platform,
+ Method::Everything => None,
};
// First, figure out our set of dependencies based on the requested set of
cur: uint) -> CargoResult<CargoResult<Context>> {
if cur == deps.len() { return Ok(Ok(cx)) }
let (dep, ref candidates, ref features) = deps[cur];
- let method = ResolveRequired(false, features.as_slice(),
+ let method = Method::Required(false, features.as_slice(),
dep.uses_default_features(), platform);
let key = (dep.get_name().to_string(), dep.get_source_id().clone());
}
fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary,
- method: ResolveMethod)
+ method: Method)
-> CargoResult<HashMap<&'a str,
(&'a Dependency, Vec<String>)>> {
let dev_deps = match method {
- ResolveEverything => true,
- ResolveRequired(dev_deps, _, _, _) => dev_deps,
+ Method::Everything => true,
+ Method::Required(dev_deps, _, _, _) => dev_deps,
};
// First, filter by dev-dependencies
// Second, ignoring dependencies that should not be compiled for this platform
let mut deps = deps.filter(|d| {
match method {
- ResolveRequired(_, _, _, Some(ref platform)) => {
+ Method::Required(_, _, _, Some(ref platform)) => {
d.is_active_for_platform(platform.as_slice())
},
_ => true
// The all used features set is the set of features which this local package had
// enabled, which is later used when compiling to instruct the code what
// features were enabled.
-fn build_features(s: &Summary, method: ResolveMethod)
+fn build_features(s: &Summary, method: Method)
-> CargoResult<(HashMap<String, Vec<String>>, HashSet<String>)> {
let mut deps = HashMap::new();
let mut used = HashSet::new();
let mut visited = HashSet::new();
match method {
- ResolveEverything => {
+ Method::Everything => {
for key in s.get_features().keys() {
try!(add_feature(s, key.as_slice(), &mut deps, &mut used,
&mut visited));
&mut visited));
}
}
- ResolveRequired(_, requested_features, _, _) => {
+ Method::Required(_, requested_features, _, _) => {
for feat in requested_features.iter() {
try!(add_feature(s, feat.as_slice(), &mut deps, &mut used,
&mut visited));
}
}
match method {
- ResolveEverything | ResolveRequired(_, _, true, _) => {
+ Method::Everything | Method::Required(_, _, true, _) => {
if s.get_features().find_equiv("default").is_some() &&
!visited.contains_equiv("default") {
try!(add_feature(s, "default", &mut deps, &mut used,
use std::io::{IoResult, stderr};
use std::fmt::Show;
+use self::AdequateTerminal::{NoColor, Colored};
+
pub struct ShellConfig {
pub color: bool,
pub verbose: bool,
}
#[deriving(Encodable, Decodable, Show, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-enum SourceKind {
- /// GitKind(<git reference>) represents a git repository
- GitKind(String),
+enum Kind {
+ /// Kind::Git(<git reference>) represents a git repository
+ Git(String),
/// represents a local path
- PathKind,
+ Path,
/// represents the central registry
- RegistryKind,
+ Registry,
}
type Error = Box<CargoError + Send>;
#[deriving(Eq, Clone)]
struct SourceIdInner {
url: Url,
- kind: SourceKind,
+ kind: Kind,
// e.g. the exact git revision of the specified branch for a Git Source
precise: Option<String>
}
impl SourceId {
- fn new(kind: SourceKind, url: Url) -> SourceId {
+ fn new(kind: Kind, url: Url) -> SourceId {
SourceId {
inner: Arc::new(SourceIdInner {
kind: kind,
},
"registry" => {
let url = url.to_url().unwrap();
- SourceId::new(RegistryKind, url)
+ SourceId::new(Kind::Registry, url)
.with_precise(Some("locked".to_string()))
}
"path" => SourceId::for_path(&Path::new(url.slice_from(5))).unwrap(),
pub fn to_url(&self) -> String {
match *self.inner {
- SourceIdInner { kind: PathKind, .. } => {
+ SourceIdInner { kind: Kind::Path, .. } => {
panic!("Path sources are not included in the lockfile, \
so this is unimplemented")
},
SourceIdInner {
- kind: GitKind(ref reference), ref url, ref precise, ..
+ kind: Kind::Git(ref reference), ref url, ref precise, ..
} => {
let ref_str = if reference.as_slice() != "master" {
format!("?ref={}", reference)
format!("git+{}{}{}", url, ref_str, precise_str)
},
- SourceIdInner { kind: RegistryKind, ref url, .. } => {
+ SourceIdInner { kind: Kind::Registry, ref url, .. } => {
format!("registry+{}", url)
}
}
// Pass absolute path
pub fn for_path(path: &Path) -> CargoResult<SourceId> {
let url = try!(path.to_url().map_err(human));
- Ok(SourceId::new(PathKind, url))
+ Ok(SourceId::new(Kind::Path, url))
}
pub fn for_git(url: &Url, reference: &str) -> SourceId {
- SourceId::new(GitKind(reference.to_string()), url.clone())
+ SourceId::new(Kind::Git(reference.to_string()), url.clone())
}
pub fn for_registry(url: &Url) -> SourceId {
- SourceId::new(RegistryKind, url.clone())
+ SourceId::new(Kind::Registry, url.clone())
}
/// Returns the `SourceId` corresponding to the main repository.
}
pub fn get_url(&self) -> &Url { &self.inner.url }
- pub fn is_path(&self) -> bool { self.inner.kind == PathKind }
- pub fn is_registry(&self) -> bool { self.inner.kind == RegistryKind }
+ pub fn is_path(&self) -> bool { self.inner.kind == Kind::Path }
+ pub fn is_registry(&self) -> bool { self.inner.kind == Kind::Registry }
pub fn is_git(&self) -> bool {
match self.inner.kind {
- GitKind(_) => true,
+ Kind::Git(_) => true,
_ => false
}
}
pub fn load<'a>(&self, config: &'a Config) -> Box<Source+'a> {
log!(5, "loading SourceId; {}", self);
match self.inner.kind {
- GitKind(..) => box GitSource::new(self, config) as Box<Source+'a>,
- PathKind => {
+ Kind::Git(..) => box GitSource::new(self, config) as Box<Source+'a>,
+ Kind::Path => {
let path = match self.inner.url.to_file_path() {
Ok(p) => p,
Err(()) => panic!("path sources cannot be remote"),
};
box PathSource::new(&path, self) as Box<Source>
},
- RegistryKind => {
+ Kind::Registry => {
box RegistrySource::new(self, config) as Box<Source+'a>
}
}
pub fn git_reference(&self) -> Option<&str> {
match self.inner.kind {
- GitKind(ref s) => Some(s.as_slice()),
+ Kind::Git(ref s) => Some(s.as_slice()),
_ => None,
}
}
pub fn is_default_registry(&self) -> bool {
match self.inner.kind {
- RegistryKind => {}
+ Kind::Registry => {}
_ => return false,
}
self.inner.url.to_string() == RegistrySource::default_url()
impl Show for SourceId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self.inner {
- SourceIdInner { kind: PathKind, ref url, .. } => url.fmt(f),
- SourceIdInner { kind: GitKind(ref reference), ref url,
+ SourceIdInner { kind: Kind::Path, ref url, .. } => url.fmt(f),
+ SourceIdInner { kind: Kind::Git(ref reference), ref url,
ref precise, .. } => {
try!(write!(f, "{}", url));
if reference.as_slice() != "master" {
}
Ok(())
},
- SourceIdInner { kind: RegistryKind, ref url, .. } => {
+ SourceIdInner { kind: Kind::Registry, ref url, .. } => {
write!(f, "registry {}", url)
}
}
if self.url == other.url { return true }
match (&self.kind, &other.kind, &self.url, &other.url) {
- (&GitKind(ref ref1), &GitKind(ref ref2), u1, u2) => {
+ (&Kind::Git(ref ref1), &Kind::Git(ref ref2), u1, u2) => {
ref1 == ref2 &&
git::canonicalize_url(u1) == git::canonicalize_url(u2)
}
fn hash(&self, into: &mut S) {
self.inner.kind.hash(into);
match *self.inner {
- SourceIdInner { kind: GitKind(..), ref url, .. } => {
+ SourceIdInner { kind: Kind::Git(..), ref url, .. } => {
git::canonicalize_url(url).hash(into)
}
_ => self.inner.url.hash(into),
#[cfg(test)]
mod tests {
- use super::{SourceId, GitKind};
+ use super::{SourceId, Kind};
use util::ToUrl;
#[test]
fn github_sources_equal() {
let loc = "https://github.com/foo/bar".to_url().unwrap();
- let s1 = SourceId::new(GitKind("master".to_string()), loc);
+ let s1 = SourceId::new(Kind::Git("master".to_string()), loc);
let loc = "git://github.com/foo/bar".to_url().unwrap();
- let s2 = SourceId::new(GitKind("master".to_string()), loc.clone());
+ let s2 = SourceId::new(Kind::Git("master".to_string()), loc.clone());
assert_eq!(s1, s2);
- let s3 = SourceId::new(GitKind("foo".to_string()), loc);
+ let s3 = SourceId::new(Kind::Git("foo".to_string()), loc);
assert!(s1 != s3);
}
}
use core::registry::PackageRegistry;
use core::{MultiShell, Source, SourceId, PackageSet, Package, Target, PackageId};
-use core::resolver;
+use core::resolver::Method;
use ops::{mod, BuildOutput};
use sources::{PathSource};
use util::config::{Config, ConfigValue};
try!(registry.add_overrides(override_ids));
let platform = target.as_ref().map(|e| e.as_slice()).or(Some(rustc_host.as_slice()));
- let method = resolver::ResolveRequired(dev_deps, features.as_slice(),
- !no_default_features,
- platform);
+ let method = Method::Required(dev_deps, features.as_slice(),
+ !no_default_features, platform);
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, package, method,
Some(&resolve), None));
use core::PackageId;
use core::registry::PackageRegistry;
-use core::{MultiShell, Source, Resolve, resolver};
+use core::{MultiShell, Source, Resolve};
+use core::resolver::Method;
use ops;
use sources::{PathSource};
use util::config::{Config};
let mut config = try!(Config::new(shell, None, None));
let mut registry = PackageRegistry::new(&mut config);
let resolve = try!(ops::resolve_with_previous(&mut registry, &package,
- resolver::ResolveEverything,
+ Method::Everything,
None, None));
try!(ops::write_pkg_lockfile(&package, &resolve));
Ok(())
let resolve = try!(ops::resolve_with_previous(&mut registry,
&package,
- resolver::ResolveEverything,
+ Method::Everything,
Some(&previous_resolve),
Some(&to_avoid)));
try!(ops::write_pkg_lockfile(&package, &resolve));
use ops;
use util::{CargoResult, human, process, ProcessError, Require};
-use core::manifest::{TargetKind, LibTarget, BinTarget, ExampleTarget};
+use core::manifest::TargetKind;
use core::source::Source;
use sources::PathSource;
let env = options.env;
let mut bins = root.get_manifest().get_targets().iter().filter(|a| {
let matches_kind = match target_kind {
- BinTarget => a.is_bin(),
- ExampleTarget => a.is_example(),
- LibTarget(_) => false,
+ TargetKind::Bin => a.is_bin(),
+ TargetKind::Example => a.is_example(),
+ TargetKind::Lib(_) => false,
};
let matches_name = name.as_ref().map_or(true, |n| n.as_slice() == a.get_name());
matches_kind && matches_name && a.get_profile().get_env() == env &&
use util::{mod, CargoResult, ChainError, internal, Config, profile};
use util::human;
-use super::{Kind, KindHost, KindTarget, Compilation, BuildConfig};
+use super::{Kind, Compilation, BuildConfig};
use super::TargetConfig;
use super::layout::{Layout, LayoutProxy};
use super::custom_build::BuildState;
#[deriving(Show)]
-pub enum PlatformRequirement {
- PlatformTarget,
- PlatformPlugin,
- PlatformPluginAndTarget,
+pub enum Platform {
+ Target,
+ Plugin,
+ PluginAndTarget,
}
pub struct Context<'a, 'b: 'a> {
package_set: &'a PackageSet,
target_dylib: Option<(String, String)>,
target_exe: String,
- requirements: HashMap<(&'a PackageId, &'a str), PlatformRequirement>,
+ requirements: HashMap<(&'a PackageId, &'a str), Platform>,
build_config: BuildConfig,
}
let targets = pkg.get_targets().iter();
for target in targets.filter(|t| t.get_profile().is_compile()) {
- self.build_requirements(pkg, target, PlatformTarget);
+ self.build_requirements(pkg, target, Platform::Target);
}
self.compilation.extra_env.insert("NUM_JOBS".to_string(),
Some(self.config.jobs().to_string()));
self.compilation.root_output =
- self.layout(pkg, KindTarget).proxy().dest().clone();
+ self.layout(pkg, Kind::Target).proxy().dest().clone();
self.compilation.deps_output =
- self.layout(pkg, KindTarget).proxy().deps().clone();
+ self.layout(pkg, Kind::Target).proxy().deps().clone();
return Ok(());
}
fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target,
- req: PlatformRequirement) {
+ req: Platform) {
- let req = if target.get_profile().is_for_host() {PlatformPlugin} else {req};
+ let req = if target.get_profile().is_for_host() {Platform::Plugin} else {req};
match self.requirements.entry((pkg.get_package_id(), target.get_name())) {
Occupied(mut entry) => match (*entry.get(), req) {
- (PlatformPlugin, PlatformPlugin) |
- (PlatformPluginAndTarget, PlatformPlugin) |
- (PlatformTarget, PlatformTarget) |
- (PlatformPluginAndTarget, PlatformTarget) |
- (PlatformPluginAndTarget, PlatformPluginAndTarget) => return,
+ (Platform::Plugin, Platform::Plugin) |
+ (Platform::PluginAndTarget, Platform::Plugin) |
+ (Platform::Target, Platform::Target) |
+ (Platform::PluginAndTarget, Platform::Target) |
+ (Platform::PluginAndTarget, Platform::PluginAndTarget) => return,
_ => *entry.get_mut() = entry.get().combine(req),
},
Vacant(entry) => { entry.set(req); }
match pkg.get_targets().iter().find(|t| t.get_profile().is_custom_build()) {
Some(custom_build) => {
- self.build_requirements(pkg, custom_build, PlatformPlugin);
+ self.build_requirements(pkg, custom_build, Platform::Plugin);
}
None => {}
}
}
pub fn get_requirement(&self, pkg: &'a Package,
- target: &'a Target) -> PlatformRequirement {
+ target: &'a Target) -> Platform {
let default = if target.get_profile().is_for_host() {
- PlatformPlugin
+ Platform::Plugin
} else {
- PlatformTarget
+ Platform::Target
};
self.requirements.get(&(pkg.get_package_id(), target.get_name()))
.map(|a| *a).unwrap_or(default)
pub fn layout(&self, pkg: &Package, kind: Kind) -> LayoutProxy {
let primary = pkg.get_package_id() == self.resolve.root();
match kind {
- KindHost => LayoutProxy::new(&self.host, primary),
- KindTarget => LayoutProxy::new(self.target.as_ref()
+ Kind::Host => LayoutProxy::new(&self.host, primary),
+ Kind::Target => LayoutProxy::new(self.target.as_ref()
.unwrap_or(&self.host),
primary),
}
/// If `plugin` is true, the pair corresponds to the host platform,
/// otherwise it corresponds to the target platform.
fn dylib(&self, kind: Kind) -> CargoResult<(&str, &str)> {
- let (triple, pair) = if kind == KindHost {
+ let (triple, pair) = if kind == Kind::Host {
(self.config.rustc_host(), &self.host_dylib)
} else {
(self.target_triple.as_slice(), &self.target_dylib)
} else {
if target.is_dylib() {
let plugin = target.get_profile().is_for_host();
- let kind = if plugin {KindHost} else {KindTarget};
+ let kind = if plugin {Kind::Host} else {Kind::Target};
let (prefix, suffix) = try!(self.dylib(kind));
ret.push(format!("{}{}{}", prefix, stem, suffix));
}
/// Get the target configuration for a particular host or target
fn target_config(&self, kind: Kind) -> &TargetConfig {
match kind {
- KindHost => &self.build_config.host,
- KindTarget => &self.build_config.target,
+ Kind::Host => &self.build_config.host,
+ Kind::Target => &self.build_config.target,
}
}
}
-impl PlatformRequirement {
- fn combine(self, other: PlatformRequirement) -> PlatformRequirement {
+impl Platform {
+ fn combine(self, other: Platform) -> Platform {
match (self, other) {
- (PlatformTarget, PlatformTarget) => PlatformTarget,
- (PlatformPlugin, PlatformPlugin) => PlatformPlugin,
- _ => PlatformPluginAndTarget,
+ (Platform::Target, Platform::Target) => Platform::Target,
+ (Platform::Plugin, Platform::Plugin) => Platform::Plugin,
+ _ => Platform::PluginAndTarget,
}
}
pub fn includes(self, kind: Kind) -> bool {
match (self, kind) {
- (PlatformPluginAndTarget, _) |
- (PlatformTarget, KindTarget) |
- (PlatformPlugin, KindHost) => true,
+ (Platform::PluginAndTarget, _) |
+ (Platform::Target, Kind::Target) |
+ (Platform::Plugin, Kind::Host) => true,
_ => false,
}
}
pub fn each_kind(self, f: |Kind|) {
match self {
- PlatformTarget => f(KindTarget),
- PlatformPlugin => f(KindHost),
- PlatformPluginAndTarget => { f(KindTarget); f(KindHost); }
+ Platform::Target => f(Kind::Target),
+ Platform::Plugin => f(Kind::Host),
+ Platform::PluginAndTarget => { f(Kind::Target); f(Kind::Host); }
}
}
}
use util::{internal, ChainError, Require};
use super::job::Work;
-use super::{fingerprint, process, KindTarget, KindHost, Kind, Context};
-use super::{PlatformPlugin, PlatformPluginAndTarget, PlatformTarget};
-use super::PlatformRequirement;
+use super::{fingerprint, process, Kind, Context, Platform};
use util::Freshness;
/// Contains the parsed output of a custom build script.
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
-pub fn prepare(pkg: &Package, target: &Target, req: PlatformRequirement,
+pub fn prepare(pkg: &Package, target: &Target, req: Platform,
cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
- let kind = match req { PlatformPlugin => KindHost, _ => KindTarget, };
+ let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
let (script_output, build_output) = {
- (cx.layout(pkg, KindHost).build(pkg),
- cx.layout(pkg, KindTarget).build_out(pkg))
+ (cx.layout(pkg, Kind::Host).build(pkg),
+ cx.layout(pkg, Kind::Target).build_out(pkg))
};
// Building the command to execute
.display().to_string()))
.env("NUM_JOBS", Some(cx.config.jobs().to_string()))
.env("TARGET", Some(match kind {
- KindHost => cx.config.rustc_host(),
- KindTarget => cx.target_triple(),
+ Kind::Host => cx.config.rustc_host(),
+ Kind::Target => cx.target_triple(),
}))
.env("DEBUG", Some(profile.get_debug().to_string()))
.env("OPT_LEVEL", Some(profile.get_opt_level().to_string()))
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
- try!(fs::mkdir_recursive(&cx.layout(pkg, KindTarget).build(pkg), USER_RWX));
- try!(fs::mkdir_recursive(&cx.layout(pkg, KindHost).build(pkg), USER_RWX));
+ try!(fs::mkdir_recursive(&cx.layout(pkg, Kind::Target).build(pkg), USER_RWX));
+ try!(fs::mkdir_recursive(&cx.layout(pkg, Kind::Host).build(pkg), USER_RWX));
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
}
}
let mut outputs = HashMap::new();
- let i1 = config.host.overrides.into_iter().map(|p| (p, KindHost));
- let i2 = config.target.overrides.into_iter().map(|p| (p, KindTarget));
+ let i1 = config.host.overrides.into_iter().map(|p| (p, Kind::Host));
+ let i2 = config.target.overrides.into_iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
match sources.get(&name) {
Some(id) => { outputs.insert((id.clone(), kind), output); }
BuildState { outputs: Mutex::new(outputs) }
}
- fn insert(&self, id: PackageId, req: PlatformRequirement,
+ fn insert(&self, id: PackageId, req: Platform,
output: BuildOutput) {
let mut outputs = self.outputs.lock();
match req {
- PlatformTarget => { outputs.insert((id, KindTarget), output); }
- PlatformPlugin => { outputs.insert((id, KindHost), output); }
+ Platform::Target => { outputs.insert((id, Kind::Target), output); }
+ Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
// If this build output was for both the host and target platforms,
// we need to insert it at both places.
- PlatformPluginAndTarget => {
- outputs.insert((id.clone(), KindHost), output.clone());
- outputs.insert((id, KindTarget), output);
+ Platform::PluginAndTarget => {
+ outputs.insert((id.clone(), Kind::Host), output.clone());
+ outputs.insert((id, Kind::Target), output);
}
}
}
use util;
use util::{CargoResult, Fresh, Dirty, Freshness, internal, Require, profile};
-use super::{Kind, KindTarget};
+use super::Kind;
use super::job::Work;
use super::context::Context;
let _p = profile::start(format!("fingerprint build cmd: {}",
pkg.get_package_id()));
- // TODO: this should not explicitly pass KindTarget
- let kind = KindTarget;
+ // TODO: this should not explicitly pass Kind::Target
+ let kind = Kind::Target;
if pkg.get_manifest().get_build().len() == 0 && target.is_none() {
return Ok((Fresh, proc(_) Ok(()), proc(_) Ok(())))
/// then later on the entire graph is processed and compiled.
pub struct JobQueue<'a, 'b> {
pool: TaskPool,
- queue: DependencyQueue<(&'a PackageId, TargetStage),
+ queue: DependencyQueue<(&'a PackageId, Stage),
(&'a Package, Vec<(Job, Freshness)>)>,
tx: Sender<Message>,
rx: Receiver<Message>,
resolve: &'a Resolve,
packages: &'a PackageSet,
active: uint,
- pending: HashMap<(&'a PackageId, TargetStage), PendingBuild>,
+ pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
ignored: HashSet<&'a PackageId>,
printed: HashSet<&'a PackageId>,
/// Each build step for a package is registered with one of these stages, and
/// each stage has a vector of work to perform in parallel.
#[deriving(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Show)]
-pub enum TargetStage {
- StageStart,
- StageBuildCustomBuild,
- StageRunCustomBuild,
- StageLibraries,
- StageBinaries,
- StageLibraryTests,
- StageBinaryTests,
+pub enum Stage {
+ Start,
+ BuildCustomBuild,
+ RunCustomBuild,
+ Libraries,
+ Binaries,
+ LibraryTests,
+ BinaryTests,
}
-type Message = (PackageId, TargetStage, Freshness, CargoResult<()>);
+type Message = (PackageId, Stage, Freshness, CargoResult<()>);
impl<'a, 'b> JobQueue<'a, 'b> {
pub fn new(resolve: &'a Resolve, packages: &'a PackageSet,
}
}
- pub fn enqueue(&mut self, pkg: &'a Package, stage: TargetStage,
+ pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage,
jobs: Vec<(Job, Freshness)>) {
// Record the freshness state of this package as dirty if any job is
// dirty or fresh otherwise
/// The input freshness is from `dequeue()` and indicates the combined
/// freshness of all upstream dependencies. This function will schedule all
/// work in `jobs` to be executed.
- fn run(&mut self, pkg: &'a Package, stage: TargetStage, fresh: Freshness,
+ fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness,
jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
let njobs = jobs.len();
let amt = if njobs == 0 {1} else {njobs};
// out any more information for a package after we've printed it once.
let print = !self.ignored.contains(&pkg.get_package_id());
let print = print && !self.printed.contains(&pkg.get_package_id());
- if print && (stage == StageLibraries ||
+ if print && (stage == Stage::Libraries ||
(total_fresh == Dirty && running.len() > 0)) {
self.printed.insert(pkg.get_package_id());
match total_fresh {
}
impl<'a> Dependency<(&'a Resolve, &'a PackageSet)>
- for (&'a PackageId, TargetStage)
+ for (&'a PackageId, Stage)
{
fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
- -> Vec<(&'a PackageId, TargetStage)> {
+ -> Vec<(&'a PackageId, Stage)> {
// This implementation of `Dependency` is the driver for the structure
// of the dependency graph of packages to be built. The "key" here is
// a pair of the package being built and the stage that it's at.
}).unwrap())
});
match stage {
- StageStart => Vec::new(),
+ Stage::Start => Vec::new(),
// Building the build command itself starts off pretty easily, we
// just need to depend on all of the library stages of our own build
// dependencies (making them available to us).
- StageBuildCustomBuild => {
- let mut base = vec![(id, StageStart)];
+ Stage::BuildCustomBuild => {
+ let mut base = vec![(id, Stage::Start)];
base.extend(deps.filter(|&(_, dep)| dep.is_build())
- .map(|(id, _)| (id, StageLibraries)));
+ .map(|(id, _)| (id, Stage::Libraries)));
base
}
// own custom build command is actually built, and then we need to
// wait for all our dependencies to finish their custom build
// commands themselves (as they may provide input to us).
- StageRunCustomBuild => {
- let mut base = vec![(id, StageBuildCustomBuild)];
+ Stage::RunCustomBuild => {
+ let mut base = vec![(id, Stage::BuildCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
- .map(|(id, _)| (id, StageRunCustomBuild)));
+ .map(|(id, _)| (id, Stage::RunCustomBuild)));
base
}
// Building a library depends on our own custom build command plus
// all our transitive dependencies.
- StageLibraries => {
- let mut base = vec![(id, StageRunCustomBuild)];
+ Stage::Libraries => {
+ let mut base = vec![(id, Stage::RunCustomBuild)];
base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
- .map(|(id, _)| (id, StageLibraries)));
+ .map(|(id, _)| (id, Stage::Libraries)));
base
}
// Binaries only depend on libraries being available. Note that they
// do not depend on dev-dependencies.
- StageBinaries => vec![(id, StageLibraries)],
+ Stage::Binaries => vec![(id, Stage::Libraries)],
// Tests depend on all dependencies (including dev-dependencies) in
// addition to the library stage for this package. Note, however,
// that library tests only need to depend on the custom build command
// being run, not the libraries themselves.
- StageBinaryTests | StageLibraryTests => {
- let mut base = if stage == StageBinaryTests {
- vec![(id, StageLibraries)]
+ Stage::BinaryTests | Stage::LibraryTests => {
+ let mut base = if stage == Stage::BinaryTests {
+ vec![(id, Stage::Libraries)]
} else {
- vec![(id, StageRunCustomBuild)]
+ vec![(id, Stage::RunCustomBuild)]
};
- base.extend(deps.map(|(id, _)| (id, StageLibraries)));
+ base.extend(deps.map(|(id, _)| (id, Stage::Libraries)));
base
}
}
use util::{Require, Config, internal, ChainError, Fresh, profile, join_paths};
use self::job::{Job, Work};
-use self::job_queue as jq;
-use self::job_queue::JobQueue;
+use self::job_queue::{JobQueue, Stage};
pub use self::compilation::Compilation;
pub use self::context::Context;
-pub use self::context::{PlatformPlugin, PlatformPluginAndTarget};
-pub use self::context::{PlatformRequirement, PlatformTarget};
+pub use self::context::Platform;
pub use self::layout::{Layout, LayoutProxy};
pub use self::custom_build::BuildOutput;
mod links;
#[deriving(PartialEq, Eq, Hash, Show)]
-pub enum Kind { KindHost, KindTarget }
+pub enum Kind { Host, Target }
#[deriving(Default, Clone)]
pub struct BuildConfig {
try!(compile(targets, pkg, true, &mut cx, &mut queue));
// Clean out any old files sticking around in directories.
- try!(cx.layout(pkg, KindHost).proxy().clean());
- try!(cx.layout(pkg, KindTarget).proxy().clean());
+ try!(cx.layout(pkg, Kind::Host).proxy().clean());
+ try!(cx.layout(pkg, Kind::Target).proxy().clean());
// Now that we've figured out everything that we're going to do, do it!
try!(queue.execute(cx.config));
- let out_dir = cx.layout(pkg, KindTarget).build_out(pkg).display().to_string();
+ let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg).display().to_string();
cx.compilation.extra_env.insert("OUT_DIR".to_string(), Some(out_dir));
Ok(cx.compilation)
}
}
// Prepare the fingerprint directory as the first step of building a package
- let (target1, target2) = fingerprint::prepare_init(cx, pkg, KindTarget);
+ let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target);
let mut init = vec![(Job::new(target1, target2), Fresh)];
if cx.config.target().is_some() {
- let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg, KindHost);
+ let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg, Kind::Host);
init.push((Job::new(plugin1, plugin2), Fresh));
}
- jobs.enqueue(pkg, jq::StageStart, init);
+ jobs.enqueue(pkg, Stage::Start, init);
// After the custom command has run, execute rustc for all targets of our
// package.
for &target in targets.iter() {
let work = if target.get_profile().is_doc() {
let rustdoc = try!(rustdoc(pkg, target, cx));
- vec![(rustdoc, KindTarget)]
+ vec![(rustdoc, Kind::Target)]
} else {
let req = cx.get_requirement(pkg, target);
try!(rustc(pkg, target, cx, req))
!t.get_profile().is_custom_build() && !t.get_profile().is_doc()
}).map(|&other_target| {
cx.get_requirement(pkg, other_target)
- }).unwrap_or(PlatformTarget);
+ }).unwrap_or(Platform::Target);
match requirement {
- PlatformTarget => reqs.push(PlatformTarget),
- PlatformPlugin => reqs.push(PlatformPlugin),
- PlatformPluginAndTarget => {
+ Platform::Target => reqs.push(Platform::Target),
+ Platform::Plugin => reqs.push(Platform::Plugin),
+ Platform::PluginAndTarget => {
if cx.config.target().is_some() {
- reqs.push(PlatformPlugin);
- reqs.push(PlatformTarget);
+ reqs.push(Platform::Plugin);
+ reqs.push(Platform::Target);
} else {
- reqs.push(PlatformPluginAndTarget);
+ reqs.push(Platform::PluginAndTarget);
}
}
}
let before = run_custom.len();
for &req in reqs.iter() {
- let kind = match req { PlatformPlugin => KindHost, _ => KindTarget };
+ let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target };
let key = (pkg.get_package_id().clone(), kind);
if pkg.get_manifest().get_links().is_some() &&
cx.build_state.outputs.lock().contains_key(&key) {
if targets.iter().any(|t| t.get_profile().is_custom_build()) {
// New custom build system
- jobs.enqueue(pkg, jq::StageBuildCustomBuild, build_custom);
- jobs.enqueue(pkg, jq::StageRunCustomBuild, run_custom);
+ jobs.enqueue(pkg, Stage::BuildCustomBuild, build_custom);
+ jobs.enqueue(pkg, Stage::RunCustomBuild, run_custom);
} else {
// Old custom build system
for cmd in build_cmds.into_iter() { try!(cmd(desc_tx.clone())) }
dirty(desc_tx)
};
- jobs.enqueue(pkg, jq::StageBuildCustomBuild, vec![]);
- jobs.enqueue(pkg, jq::StageRunCustomBuild, vec![(job(dirty, fresh),
+ jobs.enqueue(pkg, Stage::BuildCustomBuild, vec![]);
+ jobs.enqueue(pkg, Stage::RunCustomBuild, vec![(job(dirty, fresh),
freshness)]);
}
- jobs.enqueue(pkg, jq::StageLibraries, libs);
- jobs.enqueue(pkg, jq::StageBinaries, bins);
- jobs.enqueue(pkg, jq::StageBinaryTests, bin_tests);
- jobs.enqueue(pkg, jq::StageLibraryTests, lib_tests);
+ jobs.enqueue(pkg, Stage::Libraries, libs);
+ jobs.enqueue(pkg, Stage::Binaries, bins);
+ jobs.enqueue(pkg, Stage::BinaryTests, bin_tests);
+ jobs.enqueue(pkg, Stage::LibraryTests, lib_tests);
Ok(())
}
// TODO: this needs to be smarter about splitting
let mut cmd = cmd.split(' ');
- // TODO: this shouldn't explicitly pass `KindTarget` for dest/deps_dir, we
+ // TODO: this shouldn't explicitly pass `Kind::Target` for dest/deps_dir, we
// may be building a C lib for a plugin
- let layout = cx.layout(pkg, KindTarget);
+ let layout = cx.layout(pkg, Kind::Target);
let output = layout.native(pkg);
let mut p = try!(process(cmd.next().unwrap(), pkg, target, cx))
.env("OUT_DIR", Some(&output))
}
fn rustc(package: &Package, target: &Target,
- cx: &mut Context, req: PlatformRequirement)
+ cx: &mut Context, req: Platform)
-> CargoResult<Vec<(Work, Kind)> >{
let crate_types = target.rustc_crate_types();
let rustcs = try!(prepare_rustc(package, target, crate_types, cx, req));
}
fn prepare_rustc(package: &Package, target: &Target, crate_types: Vec<&str>,
- cx: &Context, req: PlatformRequirement)
+ cx: &Context, req: Platform)
-> CargoResult<Vec<(ProcessBuilder, Kind)>> {
let base = try!(process("rustc", package, target, cx));
let base = build_base_args(cx, base, package, target, crate_types.as_slice());
- let target_cmd = build_plugin_args(base.clone(), cx, package, target, KindTarget);
- let plugin_cmd = build_plugin_args(base, cx, package, target, KindHost);
+ let target_cmd = build_plugin_args(base.clone(), cx, package, target, Kind::Target);
+ let plugin_cmd = build_plugin_args(base, cx, package, target, Kind::Host);
let target_cmd = try!(build_deps_args(target_cmd, target, package, cx,
- KindTarget));
+ Kind::Target));
let plugin_cmd = try!(build_deps_args(plugin_cmd, target, package, cx,
- KindHost));
+ Kind::Host));
Ok(match req {
- PlatformTarget => vec![(target_cmd, KindTarget)],
- PlatformPlugin => vec![(plugin_cmd, KindHost)],
- PlatformPluginAndTarget if cx.config.target().is_none() =>
- vec![(target_cmd, KindTarget)],
- PlatformPluginAndTarget => vec![(target_cmd, KindTarget),
- (plugin_cmd, KindHost)],
+ Platform::Target => vec![(target_cmd, Kind::Target)],
+ Platform::Plugin => vec![(plugin_cmd, Kind::Host)],
+ Platform::PluginAndTarget if cx.config.target().is_none() =>
+ vec![(target_cmd, Kind::Target)],
+ Platform::PluginAndTarget => vec![(target_cmd, Kind::Target),
+ (plugin_cmd, Kind::Host)],
})
}
fn rustdoc(package: &Package, target: &Target,
cx: &mut Context) -> CargoResult<Work> {
- let kind = KindTarget;
+ let kind = Kind::Target;
let pkg_root = package.get_root();
let cx_root = cx.layout(package, kind).proxy().dest().join("doc");
let rustdoc = try!(process("rustdoc", package, target, cx)).cwd(pkg_root.clone());
let dep_info_loc = fingerprint::dep_info_loc(cx, pkg, target, kind);
cmd = cmd.arg("--dep-info").arg(dep_info_loc);
- if kind == KindTarget {
+ if kind == Kind::Target {
fn opt(cmd: ProcessBuilder, key: &str, prefix: &str,
val: Option<&str>) -> ProcessBuilder {
match val {
// plugin, then we want the plugin directory. Otherwise we want the
// target directory (hence the || here).
let layout = cx.layout(pkg, match kind {
- KindHost => KindHost,
- KindTarget if target.get_profile().is_for_host() => KindHost,
- KindTarget => KindTarget,
+ Kind::Host => Kind::Host,
+ Kind::Target if target.get_profile().is_for_host() => Kind::Host,
+ Kind::Target => Kind::Target,
});
for filename in try!(cx.target_filenames(target)).iter() {
cx: &Context) -> CargoResult<ProcessBuilder> {
// When invoking a tool, we need the *host* deps directory in the dynamic
// library search path for plugins and such which have dynamic dependencies.
- let layout = cx.layout(pkg, KindHost);
+ let layout = cx.layout(pkg, Kind::Host);
let mut search_path = DynamicLibrary::search_path();
search_path.push(layout.deps().clone());
pub use self::cargo_compile::{compile, compile_pkg, CompileOptions};
pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages};
pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, rustc_version};
-pub use self::cargo_rustc::{KindTarget, KindHost, Context, LayoutProxy};
-pub use self::cargo_rustc::{PlatformRequirement, PlatformTarget};
-pub use self::cargo_rustc::{PlatformPlugin, PlatformPluginAndTarget};
+pub use self::cargo_rustc::{Context, LayoutProxy};
+pub use self::cargo_rustc::Platform;
pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig};
pub use self::cargo_run::run;
pub use self::cargo_new::{new, NewOptions};
use sources::{PathSource, RegistrySource};
use util::config;
use util::{CargoResult, human, internal, ChainError, Require, ToUrl};
-use util::config::{Config, Table};
+use util::config::{Config, ConfigValue, Location};
pub struct RegistryConfig {
pub index: Option<String>,
let p = os::getcwd();
match index {
Some(index) => {
- map.insert("index".to_string(), config::String(index, p.clone()));
+ map.insert("index".to_string(), ConfigValue::String(index, p.clone()));
}
None => {}
}
- map.insert("token".to_string(), config::String(token, p));
+ map.insert("token".to_string(), ConfigValue::String(token, p));
- config::set_config(&config, config::Global, "registry", config::Table(map))
+ config::set_config(&config, Location::Global, "registry",
+ ConfigValue::Table(map))
}
pub struct OwnersOptions {
use core::{Package, PackageId, SourceId};
use core::registry::PackageRegistry;
-use core::resolver::{mod, Resolve};
+use core::resolver::{mod, Resolve, Method};
use ops;
use util::CargoResult;
-> CargoResult<Resolve> {
let prev = try!(ops::load_pkg_lockfile(package));
let resolve = try!(resolve_with_previous(registry, package,
- resolver::ResolveEverything,
+ Method::Everything,
prev.as_ref(), None));
try!(ops::write_pkg_lockfile(package, &resolve));
Ok(resolve)
/// read or write lockfiles from the filesystem.
pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
package: &Package,
- method: resolver::ResolveMethod,
+ method: Method,
previous: Option<&'a Resolve>,
to_avoid: Option<&HashSet<&'a PackageId>>)
-> CargoResult<Resolve> {
use core::{Package, PackageId, Summary, Registry, Dependency};
use util::{CargoResult, Config, to_hex};
use sources::PathSource;
-use sources::git::utils::{GitReference, GitRemote, Master, Other, GitRevision};
+use sources::git::utils::{GitReference, GitRemote, GitRevision};
/* TODO: Refactor GitSource to delegate to a PathSource
*/
try!(write!(f, "git repo at {}", self.remote.get_url()));
match self.reference {
- Master => Ok(()),
- Other(ref reference) => write!(f, " ({})", reference)
+ GitReference::Master => Ok(()),
+ GitReference::Other(ref reference) => write!(f, " ({})", reference)
}
}
}
impl GitReference {
pub fn for_str<S: Str>(string: S) -> GitReference {
if string.as_slice() == "master" {
- Master
+ GitReference::Master
} else {
- Other(string.as_slice().to_string())
+ GitReference::Other(string.as_slice().to_string())
}
}
}
impl Str for GitReference {
fn as_slice(&self) -> &str {
match *self {
- Master => "master",
- Other(ref string) => string.as_slice()
+ GitReference::Master => "master",
+ GitReference::Other(ref string) => string.as_slice()
}
}
}
use util::toml as cargo_toml;
+use self::ConfigValue as CV;
+
pub struct Config<'a> {
home_path: Path,
shell: RefCell<&'a mut MultiShell>,
impl fmt::Show for ConfigValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
- String(ref string, ref path) => {
+ CV::String(ref string, ref path) => {
write!(f, "{} (from {})", string, path.display())
}
- List(ref list) => {
+ CV::List(ref list) => {
try!(write!(f, "["));
for (i, &(ref s, ref path)) in list.iter().enumerate() {
if i > 0 { try!(write!(f, ", ")); }
}
write!(f, "]")
}
- Table(ref table) => write!(f, "{}", table),
- Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()),
+ CV::Table(ref table) => write!(f, "{}", table),
+ CV::Boolean(b, ref path) => {
+ write!(f, "{} (from {})", b, path.display())
+ }
}
}
}
impl<E, S: Encoder<E>> Encodable<S, E> for ConfigValue {
fn encode(&self, s: &mut S) -> Result<(), E> {
match *self {
- String(ref string, _) => string.encode(s),
- List(ref list) => {
+ CV::String(ref string, _) => string.encode(s),
+ CV::List(ref list) => {
let list: Vec<&string::String> = list.iter().map(|s| s.ref0()).collect();
list.encode(s)
}
- Table(ref table) => table.encode(s),
- Boolean(b, _) => b.encode(s),
+ CV::Table(ref table) => table.encode(s),
+ CV::Boolean(b, _) => b.encode(s),
}
}
}
impl ConfigValue {
fn from_toml(path: &Path, toml: toml::Value) -> CargoResult<ConfigValue> {
match toml {
- toml::String(val) => Ok(String(val, path.clone())),
- toml::Boolean(b) => Ok(Boolean(b, path.clone())),
+ toml::String(val) => Ok(CV::String(val, path.clone())),
+ toml::Boolean(b) => Ok(CV::Boolean(b, path.clone())),
toml::Array(val) => {
- Ok(List(try!(val.into_iter().map(|toml| {
+ Ok(CV::List(try!(val.into_iter().map(|toml| {
match toml {
toml::String(val) => Ok((val, path.clone())),
_ => Err(internal("")),
}).collect::<CargoResult<_>>())))
}
toml::Table(val) => {
- Ok(Table(try!(val.into_iter().map(|(key, value)| {
- let value = raw_try!(ConfigValue::from_toml(path, value));
+ Ok(CV::Table(try!(val.into_iter().map(|(key, value)| {
+ let value = raw_try!(CV::from_toml(path, value));
Ok((key, value))
}).collect::<CargoResult<_>>())))
}
fn merge(&mut self, from: ConfigValue) -> CargoResult<()> {
match (self, from) {
- (&String(..), String(..)) |
- (&Boolean(..), Boolean(..)) => {}
- (&List(ref mut old), List(ref mut new)) => {
+ (&CV::String(..), CV::String(..)) |
+ (&CV::Boolean(..), CV::Boolean(..)) => {}
+ (&CV::List(ref mut old), CV::List(ref mut new)) => {
let new = mem::replace(new, Vec::new());
old.extend(new.into_iter());
}
- (&Table(ref mut old), Table(ref mut new)) => {
+ (&CV::Table(ref mut old), CV::Table(ref mut new)) => {
let new = mem::replace(new, HashMap::new());
for (key, value) in new.into_iter() {
match old.entry(key) {
pub fn string(&self) -> CargoResult<(&str, &Path)> {
match *self {
- String(ref s, ref p) => Ok((s.as_slice(), p)),
+ CV::String(ref s, ref p) => Ok((s.as_slice(), p)),
_ => Err(internal(format!("expected a string, but found a {}",
self.desc()))),
}
pub fn table(&self) -> CargoResult<&HashMap<string::String, ConfigValue>> {
match *self {
- Table(ref table) => Ok(table),
+ CV::Table(ref table) => Ok(table),
_ => Err(internal(format!("expected a table, but found a {}",
self.desc()))),
}
pub fn list(&self) -> CargoResult<&[(string::String, Path)]> {
match *self {
- List(ref list) => Ok(list.as_slice()),
+ CV::List(ref list) => Ok(list.as_slice()),
_ => Err(internal(format!("expected a list, but found a {}",
self.desc()))),
}
pub fn boolean(&self) -> CargoResult<(bool, &Path)> {
match *self {
- Boolean(b, ref p) => Ok((b, p)),
+ CV::Boolean(b, ref p) => Ok((b, p)),
_ => Err(internal(format!("expected a bool, but found a {}",
self.desc()))),
}
pub fn desc(&self) -> &'static str {
match *self {
- Table(..) => "table",
- List(..) => "array",
- String(..) => "string",
- Boolean(..) => "boolean",
+ CV::Table(..) => "table",
+ CV::List(..) => "array",
+ CV::String(..) => "string",
+ CV::Boolean(..) => "boolean",
}
}
fn into_toml(self) -> toml::Value {
match self {
- Boolean(s, _) => toml::Boolean(s),
- String(s, _) => toml::String(s),
- List(l) => toml::Array(l.into_iter().map(|(s, _)| toml::String(s))
+ CV::Boolean(s, _) => toml::Boolean(s),
+ CV::String(s, _) => toml::String(s),
+ CV::List(l) => toml::Array(l.into_iter().map(|(s, _)| toml::String(s))
.collect()),
- Table(l) => toml::Table(l.into_iter()
- .map(|(k, v)| (k, v.into_toml()))
- .collect()),
+ CV::Table(l) => toml::Table(l.into_iter()
+ .map(|(k, v)| (k, v.into_toml()))
+ .collect()),
}
}
}
}
pub fn all_configs(pwd: Path) -> CargoResult<HashMap<string::String, ConfigValue>> {
- let mut cfg = Table(HashMap::new());
+ let mut cfg = CV::Table(HashMap::new());
try!(walk_tree(&pwd, |mut file| {
let path = file.path().clone();
internal(format!("could not parse Toml manifest; path={}",
path.display()))
}));
- let value = try!(ConfigValue::from_toml(&path, toml::Table(table)));
+ let value = try!(CV::from_toml(&path, toml::Table(table)));
try!(cfg.merge(value));
Ok(())
}).chain_error(|| human("Couldn't load Cargo configuration")));
match cfg {
- Table(map) => Ok(map),
+ CV::Table(map) => Ok(map),
_ => unreachable!(),
}
}
let mut toml = try!(cargo_toml::parse(contents.as_slice(), file.path()));
let val = try!(toml.remove(&key.to_string()).require(|| internal("")));
- ConfigValue::from_toml(file.path(), val)
+ CV::from_toml(file.path(), val)
}
pub fn set_config(cfg: &Config, loc: Location, key: &str,
// 2. This blows away all comments in a file
// 3. This blows away the previous ordering of a file.
let file = match loc {
- Global => cfg.home_path.join(".cargo").join("config"),
- Project => unimplemented!(),
+ Location::Global => cfg.home_path.join(".cargo").join("config"),
+ Location::Project => unimplemented!(),
};
let contents = File::open(&file).read_to_string().unwrap_or("".to_string());
let mut toml = try!(cargo_toml::parse(contents.as_slice(), &file));
use std::collections::hash_map::{Occupied, Vacant};
use std::hash::Hash;
+pub use self::Freshness::{Fresh, Dirty};
+
pub struct DependencyQueue<K, V> {
/// A list of all known keys to build.
///
return;
}
- marks.insert(node.clone(), InProgress);
+ marks.insert(node.clone(), Mark::InProgress);
for child in self.nodes[*node].iter() {
self.visit(child, dst, marks);
}
dst.push(node.clone());
- marks.insert(node.clone(), Done);
+ marks.insert(node.clone(), Mark::Done);
}
pub fn iter(&self) -> Nodes<N> {
use core::SourceId;
use core::{Summary, Manifest, Target, Dependency, PackageId};
-use core::dependency::{Build, Development};
-use core::manifest::{LibKind, Lib, Dylib, Profile, ManifestMetadata};
+use core::dependency::Kind;
+use core::manifest::{LibKind, Profile, ManifestMetadata};
use core::package_id::Metadata;
use util::{CargoResult, Require, human, ToUrl, ToSemver};
#[deriving(Decodable)]
pub enum TomlDependency {
- SimpleDep(String),
- DetailedDep(DetailedTomlDependency)
+ Simple(String),
+ Detailed(DetailedTomlDependency)
}
impl<T> ManyOrOne<T> {
fn as_slice(&self) -> &[T] {
match *self {
- Many(ref v) => v.as_slice(),
- One(ref t) => slice::ref_slice(t),
+ ManyOrOne::Many(ref v) => v.as_slice(),
+ ManyOrOne::One(ref t) => slice::ref_slice(t),
}
}
}
name: String,
version: TomlVersion,
authors: Vec<String>,
- build: Option<TomlBuildCommandsList>, // TODO: `String` instead
+ build: Option<BuildCommand>, // TODO: `String` instead
links: Option<String>,
exclude: Option<Vec<String>>,
// TODO: deprecated, remove
#[deriving(Decodable)]
-pub enum TomlBuildCommandsList {
- SingleBuildCommand(String),
- MultipleBuildCommands(Vec<String>)
+pub enum BuildCommand {
+ Single(String),
+ Multiple(Vec<String>)
}
pub struct TomlVersion {
layout.lib.as_ref().map(|lib| {
vec![TomlTarget {
name: name.to_string(),
- path: Some(TomlPath(lib.clone())),
+ path: Some(PathValue::Path(lib.clone())),
.. TomlTarget::new()
}]
}).unwrap_or(Vec::new())
name.map(|name| {
TomlTarget {
name: name,
- path: Some(TomlPath(bin.clone())),
+ path: Some(PathValue::Path(bin.clone())),
.. TomlTarget::new()
}
})
ex.filestem_str().map(|name| {
TomlTarget {
name: name.to_string(),
- path: Some(TomlPath(ex.clone())),
+ path: Some(PathValue::Path(ex.clone())),
.. TomlTarget::new()
}
})
ex.filestem_str().map(|name| {
TomlTarget {
name: name.to_string(),
- path: Some(TomlPath(ex.clone())),
+ path: Some(PathValue::Path(ex.clone())),
.. TomlTarget::new()
}
})
ex.filestem_str().map(|name| {
TomlTarget {
name: name.to_string(),
- path: Some(TomlPath(ex.clone())),
+ path: Some(PathValue::Path(ex.clone())),
.. TomlTarget::new()
}
})
let lib = match self.lib {
Some(ref libs) => {
match *libs {
- Many(..) => used_deprecated_lib = true,
+ ManyOrOne::Many(..) => used_deprecated_lib = true,
_ => {}
}
libs.as_slice().iter().map(|t| {
if layout.lib.is_some() && t.path.is_none() {
TomlTarget {
- path: layout.lib.as_ref().map(|p| TomlPath(p.clone())),
+ path: layout.lib.as_ref().map(|p| PathValue::Path(p.clone())),
.. t.clone()
}
} else {
bins.iter().map(|t| {
if bin.is_some() && t.path.is_none() {
TomlTarget {
- path: bin.as_ref().map(|&p| TomlPath(p.clone())),
+ path: bin.as_ref().map(|&p| PathValue::Path(p.clone())),
.. t.clone()
}
} else {
// processing the custom build script
let (new_build, old_build) = match project.build {
- Some(SingleBuildCommand(ref cmd)) => {
+ Some(BuildCommand::Single(ref cmd)) => {
if cmd.as_slice().ends_with(".rs") && layout.root.join(cmd.as_slice()).exists() {
(Some(Path::new(cmd.as_slice())), Vec::new())
} else {
(None, vec!(cmd.clone()))
}
},
- Some(MultipleBuildCommands(ref cmd)) => (None, cmd.clone()),
+ Some(BuildCommand::Multiple(ref cmd)) => (None, cmd.clone()),
None => (None, Vec::new())
};
try!(process_dependencies(&mut cx, self.dependencies.as_ref(),
|dep| dep));
try!(process_dependencies(&mut cx, self.dev_dependencies.as_ref(),
- |dep| dep.kind(Development)));
+ |dep| dep.kind(Kind::Development)));
try!(process_dependencies(&mut cx, self.build_dependencies.as_ref(),
- |dep| dep.kind(Build)));
+ |dep| dep.kind(Kind::Build)));
if let Some(targets) = self.target.as_ref() {
for (name, platform) in targets.iter() {
};
for (n, v) in dependencies.iter() {
let details = match *v {
- SimpleDep(ref version) => {
+ TomlDependency::Simple(ref version) => {
let mut d: DetailedTomlDependency = Default::default();
d.version = Some(version.clone());
d
}
- DetailedDep(ref details) => details.clone(),
+ TomlDependency::Detailed(ref details) => details.clone(),
};
let reference = details.branch.clone()
.or_else(|| details.tag.clone())
struct TomlTarget {
name: String,
crate_type: Option<Vec<String>>,
- path: Option<TomlPathValue>,
+ path: Option<PathValue>,
test: Option<bool>,
doctest: Option<bool>,
bench: Option<bool>,
}
#[deriving(Decodable, Clone)]
-enum TomlPathValue {
- TomlString(String),
- TomlPath(Path),
+enum PathValue {
+ String(String),
+ Path(Path),
}
/// Corresponds to a `target` entry, but `TomlTarget` is already used.
}
}
-impl TomlPathValue {
+impl PathValue {
fn to_path(&self) -> Path {
match *self {
- TomlString(ref s) => Path::new(s.as_slice()),
- TomlPath(ref p) => p.clone(),
+ PathValue::String(ref s) => Path::new(s.as_slice()),
+ PathValue::Path(ref p) => p.clone(),
}
}
}
-impl fmt::Show for TomlPathValue {
+impl fmt::Show for PathValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
- TomlString(ref s) => s.fmt(f),
- TomlPath(ref p) => p.display().fmt(f),
+ PathValue::String(ref s) => s.fmt(f),
+ PathValue::Path(ref p) => p.display().fmt(f),
}
}
}
}
match dep {
- Needed => {
+ TestDep::Needed => {
ret.push(merge(Profile::default_test().test(false),
&profiles.test));
ret.push(merge(Profile::default_doc().doc(false),
dep: TestDep, metadata: &Metadata, profiles: &TomlProfiles) {
let l = &libs[0];
let path = l.path.clone().unwrap_or_else(|| {
- TomlString(format!("src/{}.rs", l.name))
+ PathValue::String(format!("src/{}.rs", l.name))
});
let crate_types = l.crate_type.clone().and_then(|kinds| {
LibKind::from_strs(kinds).ok()
}).unwrap_or_else(|| {
- vec![if l.plugin == Some(true) {Dylib} else {Lib}]
+ vec![if l.plugin == Some(true) {LibKind::Dylib} else {LibKind::Lib}]
});
for profile in target_profiles(l, profiles, dep).iter() {
default: |&TomlBinTarget| -> String) {
for bin in bins.iter() {
let path = bin.path.clone().unwrap_or_else(|| {
- TomlString(default(bin))
+ PathValue::String(default(bin))
});
for profile in target_profiles(bin, profiles, dep).iter() {
profiles: &TomlProfiles,
default: |&TomlExampleTarget| -> String) {
for ex in examples.iter() {
- let path = ex.path.clone().unwrap_or_else(|| TomlString(default(ex)));
+ let path = ex.path.clone().unwrap_or_else(|| PathValue::String(default(ex)));
let profile = Profile::default_test().test(false);
let profile = merge(profile, &profiles.test);
default: |&TomlTestTarget| -> String) {
for test in tests.iter() {
let path = test.path.clone().unwrap_or_else(|| {
- TomlString(default(test))
+ PathValue::String(default(test))
});
let harness = test.harness.unwrap_or(true);
default: |&TomlBenchTarget| -> String) {
for bench in benches.iter() {
let path = bench.path.clone().unwrap_or_else(|| {
- TomlString(default(bench))
+ PathValue::String(default(bench))
});
let harness = bench.harness.unwrap_or(true);
let mut ret = Vec::new();
let test_dep = if examples.len() > 0 || tests.len() > 0 || benches.len() > 0 {
- Needed
+ TestDep::Needed
} else {
- NotNeeded
+ TestDep::NotNeeded
};
match (libs, bins) {
([_, ..], [_, ..]) => {
- lib_targets(&mut ret, libs, Needed, metadata, profiles);
+ lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles);
bin_targets(&mut ret, bins, test_dep, metadata, profiles,
|bin| format!("src/bin/{}.rs", bin.name));
},
([_, ..], []) => {
- lib_targets(&mut ret, libs, Needed, metadata, profiles);
+ lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles);
},
([], [_, ..]) => {
bin_targets(&mut ret, bins, test_dep, metadata, profiles,
pub type Result<T> = result::Result<T, Error>;
pub enum Error {
- CurlError(curl::ErrCode),
+ Curl(curl::ErrCode),
NotOkResponse(http::Response),
NonUtf8Body,
- ApiErrors(Vec<String>),
+ Api(Vec<String>),
Unauthorized,
- IoError(io::IoError),
+ Io(io::IoError),
}
#[deriving(Encodable)]
// <json request> (metadata for the package)
// <le u32 of tarball>
// <source tarball>
- let stat = try!(fs::stat(tarball).map_err(IoError));
+ let stat = try!(fs::stat(tarball).map_err(Error::Io));
let header = {
let mut w = MemWriter::new();
w.write_le_u32(json.len() as u32).unwrap();
w.write_le_u32(stat.size as u32).unwrap();
MemReader::new(w.unwrap())
};
- let tarball = try!(File::open(tarball).map_err(IoError));
+ let tarball = try!(File::open(tarball).map_err(Error::Io));
let size = stat.size as uint + header.get_ref().len();
let mut body = ChainedReader::new(vec![box header as Box<Reader>,
box tarball as Box<Reader>].into_iter());
fn handle(response: result::Result<http::Response, curl::ErrCode>)
-> Result<String> {
- let response = try!(response.map_err(CurlError));
+ let response = try!(response.map_err(Error::Curl));
match response.get_code() {
0 => {} // file upload url sometimes
200 => {}
- 403 => return Err(Unauthorized),
- _ => return Err(NotOkResponse(response))
+ 403 => return Err(Error::Unauthorized),
+ _ => return Err(Error::NotOkResponse(response))
}
let body = match String::from_utf8(response.move_body()) {
Ok(body) => body,
- Err(..) => return Err(NonUtf8Body),
+ Err(..) => return Err(Error::NonUtf8Body),
};
match json::decode::<ApiErrorList>(body.as_slice()) {
Ok(errors) => {
- return Err(ApiErrors(errors.errors.into_iter().map(|s| s.detail)
- .collect()))
+ return Err(Error::Api(errors.errors.into_iter().map(|s| s.detail)
+ .collect()))
}
Err(..) => {}
}
impl fmt::Show for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
- NonUtf8Body => write!(f, "reponse body was not utf-8"),
- CurlError(ref err) => write!(f, "http error: {}", err),
- NotOkResponse(ref resp) => {
+ Error::NonUtf8Body => write!(f, "response body was not utf-8"),
+ Error::Curl(ref err) => write!(f, "http error: {}", err),
+ Error::NotOkResponse(ref resp) => {
write!(f, "failed to get a 200 OK response: {}", resp)
}
- ApiErrors(ref errs) => {
+ Error::Api(ref errs) => {
write!(f, "api errors: {}", errs.connect(", "))
}
- Unauthorized => write!(f, "unauthorized API access"),
- IoError(ref e) => write!(f, "io error: {}", e),
+ Error::Unauthorized => write!(f, "unauthorized API access"),
+ Error::Io(ref e) => write!(f, "io error: {}", e),
}
}
}
use cargo::core::dependency::Development;
use cargo::core::{Dependency, PackageId, Summary, Registry};
use cargo::util::{CargoResult, ToUrl};
-use cargo::core::resolver::{mod, ResolveEverything};
+use cargo::core::resolver::{mod, Method};
fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>,
registry: &mut R)
-> CargoResult<Vec<PackageId>> {
let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
- let method = ResolveEverything;
+ let method = Method::Everything;
Ok(try!(resolver::resolve(&summary, method, registry)).iter().map(|p| {
p.clone()
}).collect())
for f in ar.files().assert() {
let f = f.assert();
let fname = f.filename_bytes();
- assert!(fname == Path::new("foo-0.0.1/Cargo.toml").as_vec() ||
- fname == Path::new("foo-0.0.1/src/main.rs").as_vec(),
+ assert!(fname == b"foo-0.0.1/Cargo.toml" ||
+ fname == b"foo-0.0.1/src/main.rs",
"unexpected filename: {}", f.filename())
}
})
for file in ar.files().unwrap() {
let file = file.unwrap();
let fname = file.filename_bytes();
- assert!(fname == Path::new("foo-0.0.1/Cargo.toml").as_vec() ||
- fname == Path::new("foo-0.0.1/src/main.rs").as_vec(),
+ assert!(fname == b"foo-0.0.1/Cargo.toml" ||
+ fname == b"foo-0.0.1/src/main.rs",
"unexpected filename: {}", file.filename())
}
})